In [1]:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import pickle

import numpy as np

from keras.callbacks import ModelCheckpoint
from keras.optimizers import Adam

from image_ext import load_imgs_asarray
from create_fcn import create_fcn01, create_pupil_net00

np.random.seed(2016)
Using TensorFlow backend.
In [2]:
def load_fnames(path):
    """Read a newline-separated list of file names."""
    with open(path) as f:
        lines = f.read().split('\n')
    # The file ends with a newline, so the last element is empty; drop it.
    del lines[-1]
    return lines
In [3]:
def make_fnames(fnames, fpath, fpath_mask, mask_ext):
    """Build parallel lists of image and mask paths from a list of file names."""
    fnames_img = [fpath + '/' + name for name in fnames]
    fnames_mask = [fpath_mask + '/' + mask_ext + name for name in fnames]
    return [fnames_img, fnames_mask]
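
An equivalent variant using os.path.join, in case the data lives on a platform with a different path separator (a sketch for illustration; the cells below call make_fnames, not this):

def make_fnames_portable(fnames, fpath, fpath_mask, mask_ext):
    """Same as make_fnames, but platform-independent via os.path.join."""
    fnames_img = [os.path.join(fpath, name) for name in fnames]
    fnames_mask = [os.path.join(fpath_mask, mask_ext + name) for name in fnames]
    return [fnames_img, fnames_mask]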
In [4]:
def get_center(im):
    """Return the [x, y] centroid of the nonzero pixels of a mask image."""
    # np.nonzero gives the row (y) and column (x) indices of the mask pixels,
    # so the centroid is just the mean of each index array. This also avoids
    # mutating the caller's array.
    ys, xs = np.nonzero(im)
    return [xs.mean(), ys.mean()]
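
A quick sanity check on a toy mask (a hypothetical example, not part of the notebook's data): the centroid of a small rectangular blob should land at its center.

toy = np.zeros((10, 10))
toy[4:7, 2:5] = 255        # a 3x3 blob spanning x = 2..4, y = 4..6
print(get_center(toy))     # expected: [3.0, 5.0]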
In [5]:
#
#  MAIN STARTS FROM HERE
#
if __name__ == '__main__':
    
    target_size = (224, 224)
    dpath_this = './'
    dname_checkpoints = 'checkpoints_pupil_net00'
    dname_checkpoints_fcn01 = 'checkpoints_fcn01'
    dname_outputs = 'outputs'
    fname_architecture = 'architecture.json'
    fname_weights = "model_weights_{epoch:02d}.h5"
    fname_stats = 'stats01.npz'
    dim_ordering = 'channels_first'
    fname_history = "history.pkl"

    # mode: "LEARN", "TEST", or "SHOW_HISTORY"
    mode = "TEST"

    # Create the model
    print('creating model...')
    model_pupil_net = create_pupil_net00(target_size)
creating model...
In [7]:
#
#   LEARNING MODE
#
mode = "LEARN"
if mode == "LEARN":
    # Read training data
    fnames = load_fnames('data/list_train_01.txt')
    [fpaths_xs_train, fpaths_ys_train] = make_fnames(fnames, 'data/img', 'data/mask', 'OperatorA_')

    X_train = load_imgs_asarray(fpaths_xs_train, grayscale=False, target_size=target_size,
                                dim_ordering=dim_ordering)
    Y_train = load_imgs_asarray(fpaths_ys_train, grayscale=True, target_size=target_size,
                                dim_ordering=dim_ordering) 

    # Read validation data
    fnames = load_fnames('data/list_valid_01.txt')
    [fpaths_xs_valid, fpaths_ys_valid] = make_fnames(fnames, 'data/img', 'data/mask', 'OperatorA_')

    X_valid = load_imgs_asarray(fpaths_xs_valid, grayscale=False, target_size=target_size,
                                dim_ordering=dim_ordering)
    Y_valid = load_imgs_asarray(fpaths_ys_valid, grayscale=True, target_size=target_size,
                                dim_ordering=dim_ordering)     

    # Compute ground-truth pupil centers from the masks
    center_train = []
    center_valid = []
    
    for i in range(Y_train.shape[0]):
        center_train.append(get_center(Y_train[i,0,:,:]))

    for i in range(Y_valid.shape[0]):
        center_valid.append(get_center(Y_valid[i,0,:,:]))
    
    center_train = np.array(center_train)
    center_valid = np.array(center_valid)

    print('==> ' + str(len(X_train)) + ' training images loaded')
    print('==> ' + str(len(Y_train)) + ' training masks loaded')
    print('==> ' + str(len(X_valid)) + ' validation images loaded')
    print('==> ' + str(len(Y_valid)) + ' validation masks loaded')

    # Preprocessing
    print('computing mean and standard deviation...')
    mean = np.mean(X_train, axis=(0, 2, 3))
    std = np.std(X_train, axis=(0, 2, 3))
    print('==> mean: ' + str(mean))
    print('==> std : ' + str(std))

    print('saving mean and standard deviation to ' + fname_stats + '...')
    stats = {'mean': mean, 'std': std}
    if not os.path.exists(dname_checkpoints):
        os.mkdir(dname_checkpoints)
    np.savez(dname_checkpoints + '/' + fname_stats, **stats)
    print('==> done')

    print('globally normalizing data...')
    for i in range(3):
        X_train[:, i] = (X_train[:, i] - mean[i]) / std[i]
        X_valid[:, i] = (X_valid[:, i] - mean[i]) / std[i]
    Y_train /= 255
    Y_valid /= 255
    print('==> done')
==> 1452 training images loaded
==> 1452 training masks loaded
==> 527 validation images loaded
==> 527 validation masks loaded
computing mean and standard deviation...
==> mean: [130.65465  91.2685   76.63643]
==> std : [55.2817   43.990963 43.113483]
saving mean and standard deviation to stats01.npz...
==> done
globally normalizing data...
==> done
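As a quick sanity check (hypothetical, not run in the original notebook), the normalized training set should now have per-channel mean near 0 and standard deviation near 1:

print(X_train.mean(axis=(0, 2, 3)))   # expected: approximately [0, 0, 0]
print(X_train.std(axis=(0, 2, 3)))    # expected: approximately [1, 1, 1]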
In [8]:
    # Load the trained fcn01 weights into a fresh fcn01 model
    epoch = 200
    fname_weights = 'model_weights_%02d.h5' % (epoch)
    model_fcn01 = create_fcn01(target_size)
    fpath_weights_fcn01 = os.path.join(dname_checkpoints_fcn01, fname_weights)
    model_fcn01.load_weights(fpath_weights_fcn01)

    # Copy the early convolutional weights from the trained U-Net
    layer_names = ['conv1_1', 'conv1_2', 'conv2_1', 'conv2_2']
    
    print('copying layer weights')
    for name in layer_names:
        print(name)
        model_pupil_net.get_layer(name).set_weights(model_fcn01.get_layer(name).get_weights())
        model_pupil_net.get_layer(name).trainable = True
copying layer weights
conv1_1
conv1_2
conv2_1
conv2_2
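The copied layers are left trainable above, so the U-Net features are fine-tuned together with the new regression head. To use them as a frozen feature extractor instead, a minimal alternative sketch (not what this run does) would clear the flag before compiling, since trainable only takes effect at compile time in Keras:

for name in layer_names:
    model_pupil_net.get_layer(name).trainable = False
# recompile afterwards (as in the next cell) for the flag to take effect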
In [9]:
    # Define the optimizer and loss function
    adam = Adam(lr=1e-5)
    model_pupil_net.compile(optimizer=adam, loss='mean_squared_error')

    # Make sure the checkpoint directory exists
    dpath_checkpoints = os.path.join(dpath_this, dname_checkpoints)
    if not os.path.isdir(dpath_checkpoints):
        os.mkdir(dpath_checkpoints)

    # Prepare a callback that saves the weights after every epoch
    fname_weights = "model_weights_{epoch:02d}.h5"
    fpath_weights = os.path.join(dpath_checkpoints, fname_weights)
    checkpointer = ModelCheckpoint(filepath=fpath_weights, save_best_only=False)
In [10]:
    # Start training
    print('start training...')
    history = model_pupil_net.fit(X_train, center_train, batch_size=64, epochs=200, verbose=1,
                  shuffle=True, validation_data=(X_valid, center_valid), callbacks=[checkpointer])
start training...
Train on 1452 samples, validate on 527 samples
Epoch 1/200
1452/1452 [==============================] - 64s 44ms/step - loss: 2864.7398 - val_loss: 840.9420
Epoch 2/200
1452/1452 [==============================] - 40s 28ms/step - loss: 587.9776 - val_loss: 299.4057
Epoch 3/200
1452/1452 [==============================] - 39s 27ms/step - loss: 376.2825 - val_loss: 256.6284
Epoch 4/200
1452/1452 [==============================] - 39s 27ms/step - loss: 305.8441 - val_loss: 213.2340
Epoch 5/200
1452/1452 [==============================] - 39s 27ms/step - loss: 254.9423 - val_loss: 184.4808
Epoch 6/200
1452/1452 [==============================] - 39s 27ms/step - loss: 212.8362 - val_loss: 165.5701
Epoch 7/200
1452/1452 [==============================] - 39s 27ms/step - loss: 176.2939 - val_loss: 138.7308
Epoch 8/200
1452/1452 [==============================] - 40s 27ms/step - loss: 140.8368 - val_loss: 117.3145
Epoch 9/200
1452/1452 [==============================] - 39s 27ms/step - loss: 121.7886 - val_loss: 117.5000
Epoch 10/200
1452/1452 [==============================] - 39s 27ms/step - loss: 101.5846 - val_loss: 93.8067
Epoch 11/200
1452/1452 [==============================] - 39s 27ms/step - loss: 88.5011 - val_loss: 84.7924
Epoch 12/200
1452/1452 [==============================] - 39s 27ms/step - loss: 79.2466 - val_loss: 77.2478
Epoch 13/200
1452/1452 [==============================] - 39s 27ms/step - loss: 79.5250 - val_loss: 96.9605
Epoch 14/200
1452/1452 [==============================] - 39s 27ms/step - loss: 70.4500 - val_loss: 73.2247
Epoch 15/200
1452/1452 [==============================] - 39s 27ms/step - loss: 65.3743 - val_loss: 67.0732
Epoch 16/200
1452/1452 [==============================] - 39s 27ms/step - loss: 60.2746 - val_loss: 60.8283
Epoch 17/200
1452/1452 [==============================] - 39s 27ms/step - loss: 57.3404 - val_loss: 58.5230
Epoch 18/200
1452/1452 [==============================] - 39s 27ms/step - loss: 55.7037 - val_loss: 56.8842
Epoch 19/200
1452/1452 [==============================] - 39s 27ms/step - loss: 50.6173 - val_loss: 53.8323
Epoch 20/200
1452/1452 [==============================] - 39s 27ms/step - loss: 48.0131 - val_loss: 52.9843
Epoch 21/200
1452/1452 [==============================] - 39s 27ms/step - loss: 50.8270 - val_loss: 51.0877
Epoch 22/200
1452/1452 [==============================] - 39s 27ms/step - loss: 46.7204 - val_loss: 50.3391
Epoch 23/200
1452/1452 [==============================] - 39s 27ms/step - loss: 43.5265 - val_loss: 49.8851
Epoch 24/200
1452/1452 [==============================] - 39s 27ms/step - loss: 41.1058 - val_loss: 59.2104
Epoch 25/200
1452/1452 [==============================] - 39s 27ms/step - loss: 42.5798 - val_loss: 48.3127
Epoch 26/200
1452/1452 [==============================] - 39s 27ms/step - loss: 39.2961 - val_loss: 45.8521
Epoch 27/200
1452/1452 [==============================] - 39s 27ms/step - loss: 36.6009 - val_loss: 46.3775
Epoch 28/200
1452/1452 [==============================] - 39s 27ms/step - loss: 34.6341 - val_loss: 43.0559
Epoch 29/200
1452/1452 [==============================] - 39s 27ms/step - loss: 34.0460 - val_loss: 46.8167
Epoch 30/200
1452/1452 [==============================] - 39s 27ms/step - loss: 33.5682 - val_loss: 40.9938
Epoch 31/200
1452/1452 [==============================] - 39s 27ms/step - loss: 34.5930 - val_loss: 42.0150
Epoch 32/200
1452/1452 [==============================] - 39s 27ms/step - loss: 31.9532 - val_loss: 47.6228
Epoch 33/200
1452/1452 [==============================] - 39s 27ms/step - loss: 29.6897 - val_loss: 39.7229
Epoch 34/200
1452/1452 [==============================] - 39s 27ms/step - loss: 29.4947 - val_loss: 38.4792
Epoch 35/200
1452/1452 [==============================] - 39s 27ms/step - loss: 28.7295 - val_loss: 44.5153
Epoch 36/200
1452/1452 [==============================] - 39s 27ms/step - loss: 30.4218 - val_loss: 45.8904
Epoch 37/200
1452/1452 [==============================] - 39s 27ms/step - loss: 27.2993 - val_loss: 38.0226
Epoch 38/200
1452/1452 [==============================] - 39s 27ms/step - loss: 27.0773 - val_loss: 43.9208
Epoch 39/200
1452/1452 [==============================] - 40s 27ms/step - loss: 28.1261 - val_loss: 36.2556
Epoch 40/200
1452/1452 [==============================] - 39s 27ms/step - loss: 24.3549 - val_loss: 37.2772
Epoch 41/200
1452/1452 [==============================] - 39s 27ms/step - loss: 24.9974 - val_loss: 38.4868
Epoch 42/200
1452/1452 [==============================] - 39s 27ms/step - loss: 24.0813 - val_loss: 38.2093
Epoch 43/200
1452/1452 [==============================] - 39s 27ms/step - loss: 23.3213 - val_loss: 34.0543
Epoch 44/200
1452/1452 [==============================] - 39s 27ms/step - loss: 21.2911 - val_loss: 35.8390
Epoch 45/200
1452/1452 [==============================] - 39s 27ms/step - loss: 22.2150 - val_loss: 34.6781
Epoch 46/200
1452/1452 [==============================] - 39s 27ms/step - loss: 20.9548 - val_loss: 33.4667
Epoch 47/200
1452/1452 [==============================] - 39s 27ms/step - loss: 19.7461 - val_loss: 33.9373
Epoch 48/200
1452/1452 [==============================] - 39s 27ms/step - loss: 19.8173 - val_loss: 33.2204
Epoch 49/200
1452/1452 [==============================] - 39s 27ms/step - loss: 20.9775 - val_loss: 31.9592
Epoch 50/200
1452/1452 [==============================] - 39s 27ms/step - loss: 20.0845 - val_loss: 31.4790
Epoch 51/200
1452/1452 [==============================] - 39s 27ms/step - loss: 22.4740 - val_loss: 33.1074
Epoch 52/200
1452/1452 [==============================] - 39s 27ms/step - loss: 21.1868 - val_loss: 33.4145
Epoch 53/200
1452/1452 [==============================] - 39s 27ms/step - loss: 18.2226 - val_loss: 30.8845
Epoch 54/200
1452/1452 [==============================] - 39s 27ms/step - loss: 16.1483 - val_loss: 33.0879
Epoch 55/200
1452/1452 [==============================] - 39s 27ms/step - loss: 17.9318 - val_loss: 32.7140
Epoch 56/200
1452/1452 [==============================] - 39s 27ms/step - loss: 17.3338 - val_loss: 30.8163
Epoch 57/200
1452/1452 [==============================] - 39s 27ms/step - loss: 16.1269 - val_loss: 29.9587
Epoch 58/200
1452/1452 [==============================] - 39s 27ms/step - loss: 16.3403 - val_loss: 33.4507
Epoch 59/200
1452/1452 [==============================] - 39s 27ms/step - loss: 14.6264 - val_loss: 31.1322
Epoch 60/200
1452/1452 [==============================] - 39s 27ms/step - loss: 15.8017 - val_loss: 32.3941
Epoch 61/200
1452/1452 [==============================] - 39s 27ms/step - loss: 18.4518 - val_loss: 34.4525
Epoch 62/200
1452/1452 [==============================] - 39s 27ms/step - loss: 14.4005 - val_loss: 30.4873
Epoch 63/200
1452/1452 [==============================] - 39s 27ms/step - loss: 13.7188 - val_loss: 29.2777
Epoch 64/200
1452/1452 [==============================] - 39s 27ms/step - loss: 13.2117 - val_loss: 29.3333
Epoch 65/200
1452/1452 [==============================] - 39s 27ms/step - loss: 12.8321 - val_loss: 28.9619
Epoch 66/200
1452/1452 [==============================] - 39s 27ms/step - loss: 12.4476 - val_loss: 30.3272
Epoch 67/200
1452/1452 [==============================] - 39s 27ms/step - loss: 12.7192 - val_loss: 28.5104
Epoch 68/200
1452/1452 [==============================] - 39s 27ms/step - loss: 11.8984 - val_loss: 28.4259
Epoch 69/200
1452/1452 [==============================] - 39s 27ms/step - loss: 11.3253 - val_loss: 28.1374
Epoch 70/200
1452/1452 [==============================] - 39s 27ms/step - loss: 11.3046 - val_loss: 27.5446
Epoch 71/200
1452/1452 [==============================] - 39s 27ms/step - loss: 10.9635 - val_loss: 27.6733
Epoch 72/200
1452/1452 [==============================] - 39s 27ms/step - loss: 11.0387 - val_loss: 27.5581
Epoch 73/200
1452/1452 [==============================] - 39s 27ms/step - loss: 11.1123 - val_loss: 31.0523
Epoch 74/200
1452/1452 [==============================] - 39s 27ms/step - loss: 10.5482 - val_loss: 26.8869
Epoch 75/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.9663 - val_loss: 27.9608
Epoch 76/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.6860 - val_loss: 26.9910
Epoch 77/200
1452/1452 [==============================] - 39s 27ms/step - loss: 10.2133 - val_loss: 33.2018
Epoch 78/200
1452/1452 [==============================] - 39s 27ms/step - loss: 11.4590 - val_loss: 29.9368
Epoch 79/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.5129 - val_loss: 26.6430
Epoch 80/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.4839 - val_loss: 26.2167
Epoch 81/200
1452/1452 [==============================] - 39s 27ms/step - loss: 8.9554 - val_loss: 28.6840
Epoch 82/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.2689 - val_loss: 28.5507
Epoch 83/200
1452/1452 [==============================] - 39s 27ms/step - loss: 8.8302 - val_loss: 28.6386
Epoch 84/200
1452/1452 [==============================] - 39s 27ms/step - loss: 8.2174 - val_loss: 28.0584
Epoch 85/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.1636 - val_loss: 26.4133
Epoch 86/200
1452/1452 [==============================] - 39s 27ms/step - loss: 8.5859 - val_loss: 25.7034
Epoch 87/200
1452/1452 [==============================] - 38s 26ms/step - loss: 7.4716 - val_loss: 25.7066
Epoch 88/200
1452/1452 [==============================] - 39s 27ms/step - loss: 8.8821 - val_loss: 28.2971
Epoch 89/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.8718 - val_loss: 27.2062
Epoch 90/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.6830 - val_loss: 27.1726
Epoch 91/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.5748 - val_loss: 25.7821
Epoch 92/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.5918 - val_loss: 25.7033
Epoch 93/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.1808 - val_loss: 26.2263
Epoch 94/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.0463 - val_loss: 26.1555
Epoch 95/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.7430 - val_loss: 27.2684
Epoch 96/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.0753 - val_loss: 31.2263
Epoch 97/200
1452/1452 [==============================] - 39s 27ms/step - loss: 9.6117 - val_loss: 25.9820
Epoch 98/200
1452/1452 [==============================] - 39s 27ms/step - loss: 7.8587 - val_loss: 24.9592
Epoch 99/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.1864 - val_loss: 25.6597
Epoch 100/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.1517 - val_loss: 25.6584
Epoch 101/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.8665 - val_loss: 26.6302
Epoch 102/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.2949 - val_loss: 26.6983
Epoch 103/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.0573 - val_loss: 25.9662
Epoch 104/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.0421 - val_loss: 25.7411
Epoch 105/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.9387 - val_loss: 25.7790
Epoch 106/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.7518 - val_loss: 25.3629
Epoch 107/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.9344 - val_loss: 24.6480
Epoch 108/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.0034 - val_loss: 24.6935
Epoch 109/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.2110 - val_loss: 26.3523
Epoch 110/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.3932 - val_loss: 25.2483
Epoch 111/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.3385 - val_loss: 27.4005
Epoch 112/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.7859 - val_loss: 24.7115
Epoch 113/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.9652 - val_loss: 24.0858
Epoch 114/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.8418 - val_loss: 25.8616
Epoch 115/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.0297 - val_loss: 25.0298
Epoch 116/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.0993 - val_loss: 23.7920
Epoch 117/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.8190 - val_loss: 24.9651
Epoch 118/200
1452/1452 [==============================] - 39s 27ms/step - loss: 5.2390 - val_loss: 24.1555
Epoch 119/200
1452/1452 [==============================] - 39s 27ms/step - loss: 6.1085 - val_loss: 25.4195
Epoch 120/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.7218 - val_loss: 24.0136
Epoch 121/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.4668 - val_loss: 24.3818
Epoch 122/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.1356 - val_loss: 25.1523
Epoch 123/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.9410 - val_loss: 24.0922
Epoch 124/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.9110 - val_loss: 24.8110
Epoch 125/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.3712 - val_loss: 24.7071
Epoch 126/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.8226 - val_loss: 23.9920
Epoch 127/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.7073 - val_loss: 25.0726
Epoch 128/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.4296 - val_loss: 24.2216
Epoch 129/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.7121 - val_loss: 23.6827
Epoch 130/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.5529 - val_loss: 23.7694
Epoch 131/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.3047 - val_loss: 23.4633
Epoch 132/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.0927 - val_loss: 27.0356
Epoch 133/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.2862 - val_loss: 23.7576
Epoch 134/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.1345 - val_loss: 23.3091
Epoch 135/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.1166 - val_loss: 24.2604
Epoch 136/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.5371 - val_loss: 23.9784
Epoch 137/200
1452/1452 [==============================] - 39s 27ms/step - loss: 4.3635 - val_loss: 23.5317
Epoch 138/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.1321 - val_loss: 23.1902
Epoch 139/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.5605 - val_loss: 24.3272
Epoch 140/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.8695 - val_loss: 24.3211
Epoch 141/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.1145 - val_loss: 24.0836
Epoch 142/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.6826 - val_loss: 23.5379
Epoch 143/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.1410 - val_loss: 23.8115
Epoch 144/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.0801 - val_loss: 23.9463
Epoch 145/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.4130 - val_loss: 23.3728
Epoch 146/200
1452/1452 [==============================] - 38s 26ms/step - loss: 2.6662 - val_loss: 23.6113
Epoch 147/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.5984 - val_loss: 22.8964
Epoch 148/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.8476 - val_loss: 23.8013
Epoch 149/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.0097 - val_loss: 23.6589
Epoch 150/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.1465 - val_loss: 22.7777
Epoch 151/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.0684 - val_loss: 24.3212
Epoch 152/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.9499 - val_loss: 23.8767
Epoch 153/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.6750 - val_loss: 24.8366
Epoch 154/200
1452/1452 [==============================] - 38s 26ms/step - loss: 2.3666 - val_loss: 23.6084
Epoch 155/200
1452/1452 [==============================] - 38s 26ms/step - loss: 2.5506 - val_loss: 22.9761
Epoch 156/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.0676 - val_loss: 23.4032
Epoch 157/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.8805 - val_loss: 23.1646
Epoch 158/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.4509 - val_loss: 24.1005
Epoch 159/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.2679 - val_loss: 25.8126
Epoch 160/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.9769 - val_loss: 24.6167
Epoch 161/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.4776 - val_loss: 23.8247
Epoch 162/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.5067 - val_loss: 24.1215
Epoch 163/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.8504 - val_loss: 23.1020
Epoch 164/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.0833 - val_loss: 23.1704
Epoch 165/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.4886 - val_loss: 23.3854
Epoch 166/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.2861 - val_loss: 24.8453
Epoch 167/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.3478 - val_loss: 25.9026
Epoch 168/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.5960 - val_loss: 23.4208
Epoch 169/200
1452/1452 [==============================] - 40s 28ms/step - loss: 2.1293 - val_loss: 23.0604
Epoch 170/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7692 - val_loss: 23.1123
Epoch 171/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7880 - val_loss: 22.9096
Epoch 172/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.1898 - val_loss: 24.3152
Epoch 173/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.3388 - val_loss: 23.6675
Epoch 174/200
1452/1452 [==============================] - 40s 27ms/step - loss: 2.2746 - val_loss: 24.0896
Epoch 175/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.9833 - val_loss: 23.2336
Epoch 176/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.8633 - val_loss: 23.8369
Epoch 177/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.9572 - val_loss: 23.1680
Epoch 178/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7566 - val_loss: 22.8902
Epoch 179/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7250 - val_loss: 23.8923
Epoch 180/200
1452/1452 [==============================] - 39s 27ms/step - loss: 3.4496 - val_loss: 25.1763
Epoch 181/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.2075 - val_loss: 22.9424
Epoch 182/200
1452/1452 [==============================] - 38s 26ms/step - loss: 1.9192 - val_loss: 23.0991
Epoch 183/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7454 - val_loss: 24.1548
Epoch 184/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.0224 - val_loss: 23.1379
Epoch 185/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.8747 - val_loss: 24.9462
Epoch 186/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.9439 - val_loss: 23.7234
Epoch 187/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.0270 - val_loss: 23.1139
Epoch 188/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.8505 - val_loss: 22.9343
Epoch 189/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.6398 - val_loss: 24.0738
Epoch 190/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.6990 - val_loss: 23.5440
Epoch 191/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.8765 - val_loss: 25.3399
Epoch 192/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.1730 - val_loss: 25.1502
Epoch 193/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.0885 - val_loss: 23.2828
Epoch 194/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7084 - val_loss: 24.7189
Epoch 195/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.4864 - val_loss: 22.9060
Epoch 196/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7685 - val_loss: 23.0106
Epoch 197/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.6882 - val_loss: 23.6542
Epoch 198/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.6531 - val_loss: 23.2945
Epoch 199/200
1452/1452 [==============================] - 39s 27ms/step - loss: 2.4894 - val_loss: 23.2712
Epoch 200/200
1452/1452 [==============================] - 39s 27ms/step - loss: 1.7939 - val_loss: 23.5151
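The validation loss plateaus around 23 after roughly epoch 130 while the training loss keeps falling, so the run could likely be cut short. A sketch using standard Keras 2 callbacks (not used in the original run) that stops on a stalled val_loss and keeps only the best weights:

from keras.callbacks import EarlyStopping, ModelCheckpoint

early_stopper = EarlyStopping(monitor='val_loss', patience=20, verbose=1)
best_saver = ModelCheckpoint(filepath=os.path.join(dpath_checkpoints, 'model_weights_best.h5'),
                             monitor='val_loss', save_best_only=True)
history = model_pupil_net.fit(X_train, center_train, batch_size=64, epochs=200, verbose=1,
                              shuffle=True, validation_data=(X_valid, center_valid),
                              callbacks=[early_stopper, best_saver])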
In [11]:
    # Save the training history
    with open(dname_checkpoints + '/' + fname_history, 'wb') as f:
        pickle.dump(history.history, f)
In [18]:
#
#  TEST MODE
#
mode = "TEST"
if mode == "TEST":
    # Prediction (test) mode

    # Load the trained weights
    epoch = 200
    fname_weights = 'model_weights_%02d.h5' % (epoch)
    fpath_weights = os.path.join(dname_checkpoints, fname_weights)
    model_pupil_net.load_weights(fpath_weights)
    print('==> done')

    # Read test data
    fnames = load_fnames('data/list_test_01.txt')
    [fpaths_xs_test, fpaths_ys_test] = make_fnames(fnames, 'data/img', 'data/mask', 'OperatorA_')

    X_test = load_imgs_asarray(fpaths_xs_test, grayscale=False, target_size=target_size,
                                dim_ordering=dim_ordering)
    Y_test = load_imgs_asarray(fpaths_ys_test, grayscale=True, target_size=target_size,
                                dim_ordering=dim_ordering)

    # Compute ground-truth pupil centers from the masks
    center_test = []
    for i in range(Y_test.shape[0]):
        center_test.append(get_center(Y_test[i,0,:,:]))
    center_test = np.array(center_test)

    # Load the mean and standard deviation computed during training
    print('loading mean and standard deviation from ' + fname_stats + '...')
    stats = np.load(dname_checkpoints + '/' + fname_stats)
    mean = stats['mean']
    std = stats['std']
    print('==> mean: ' + str(mean))
    print('==> std : ' + str(std))

    for i in range(3):
        X_test[:, i] = (X_test[:, i] - mean[i]) / std[i]
    print('==> done')
==> done
loading mean and standard deviation from stats01.npz...
==> mean: [130.65465  91.2685   76.63643]
==> std : [55.2817   43.990963 43.113483]
==> done
In [19]:
    # Run prediction on the test set
    outputs = model_pupil_net.predict(X_test)
In [20]:
    diff = outputs - center_test
    print('L2 norm av. = %f' % np.mean(np.linalg.norm(diff, axis=1)))
L2 norm av. = 6.584461
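The mean alone hides the shape of the error distribution; a few frames (errors above 20 px in the listing below) dominate the average. A small sketch, not in the original run, that summarizes the per-image L2 error:

errors = np.linalg.norm(diff, axis=1)
print('median          = %f' % np.median(errors))
print('90th percentile = %f' % np.percentile(errors, 90))
print('max             = %f' % errors.max())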
In [22]:
from PIL import Image
import matplotlib.pyplot as plt

for i in range(len(fpaths_xs_test)):
    # Load and resize the test image
    im1 = Image.open(fpaths_xs_test[i])
    im1 = im1.resize(target_size)

    # Show the predicted vs. ground-truth pupil center and their distance
    print('(%f,%f) - (%f,%f)(gt): %f' % (outputs[i, 0], outputs[i, 1],
                                         center_test[i, 0], center_test[i, 1],
                                         np.linalg.norm(diff[i, :])))
    plt.imshow(im1)
    plt.plot(outputs[i, 0], outputs[i, 1], 'x')
    plt.plot(center_test[i, 0], center_test[i, 1], '*')
    plt.show()
(109.208206,100.339844) - (111.309735,103.340708)(gt): 3.663551
(111.222664,105.442001) - (112.974753,107.091109)(gt): 2.406111
(105.577324,105.161972) - (105.842505,107.641366)(gt): 2.493535
(99.614204,115.011642) - (102.058252,117.005825)(gt): 3.154384
(119.365265,90.594505) - (120.073140,95.132409)(gt): 4.592783
(112.475616,104.317093) - (111.545455,107.818182)(gt): 3.622544
(107.513245,94.295403) - (108.933110,95.879599)(gt): 2.127368
(102.013824,105.896408) - (103.211268,104.617371)(gt): 1.752086
(108.893082,102.855003) - (108.445483,101.947040)(gt): 1.012295
(110.821548,113.216904) - (108.952978,113.705329)(gt): 1.931351
(106.502296,97.650169) - (105.860294,98.577206)(gt): 1.127636
(118.959206,120.132942) - (121.791139,124.873418)(gt): 5.521952
(105.759369,105.106590) - (105.961672,106.968641)(gt): 1.873008
(90.747635,122.772774) - (87.907268,117.987469)(gt): 5.564785
(112.652481,114.037125) - (114.317529,120.750000)(gt): 6.916291
(96.416473,95.204620) - (94.922993,90.000000)(gt): 5.414661
(96.576653,103.242882) - (102.000000,108.798995)(gt): 7.764219
(106.345024,135.507553) - (106.497925,130.091286)(gt): 5.418425
(113.007996,97.146233) - (113.913043,102.138340)(gt): 5.073485
(128.523422,106.496033) - (126.479042,104.323353)(gt): 2.983291
(119.243698,98.580498) - (122.997230,98.842105)(gt): 3.762637
(113.167061,112.137451) - (113.861878,114.348066)(gt): 2.317238
(111.379013,91.952599) - (112.614130,97.353261)(gt): 5.540096
(106.046303,115.908150) - (108.570611,116.595420)(gt): 2.616194
(118.819031,110.506950) - (119.943289,113.109641)(gt): 2.835129
(101.332848,86.851852) - (102.921665,94.616891)(gt): 7.925917
(115.775612,90.909477) - (118.401225,95.424196)(gt): 5.222694
(117.802971,99.008888) - (116.422619,104.589286)(gt): 5.748583
(113.176399,100.871620) - (114.238532,103.655963)(gt): 2.980049
(112.256966,96.642189) - (115.235521,99.293436)(gt): 3.987594
(112.228348,99.073921) - (115.996678,99.292359)(gt): 3.774656
(112.701080,100.121140) - (112.587459,100.029703)(gt): 0.145844
(118.388832,108.258873) - (115.992157,109.796078)(gt): 2.847289
(104.911850,105.458679) - (111.972973,101.506757)(gt): 8.091795
(116.072945,117.853485) - (122.140097,115.572464)(gt): 6.481774
(108.824326,106.840225) - (106.988701,107.522599)(gt): 1.958355
(115.453491,96.389969) - (117.615854,102.129573)(gt): 6.133422
(82.661362,95.220711) - (79.950581,90.715116)(gt): 5.258204
(98.694855,108.625023) - (107.520833,111.215278)(gt): 9.198224
(106.330658,102.493744) - (112.250000,106.767045)(gt): 7.300665
(110.800240,88.882301) - (117.200000,89.702326)(gt): 6.452083
(109.262085,92.643517) - (110.382263,97.159021)(gt): 4.652374
(135.432251,108.451042) - (132.092958,110.704225)(gt): 4.028364
(96.656158,95.358437) - (100.672489,102.122271)(gt): 7.866407
(110.755035,102.862930) - (116.065574,106.092896)(gt): 6.215665
(103.611458,109.112915) - (102.483871,103.937788)(gt): 5.296545
(118.062790,107.783463) - (116.876147,107.474771)(gt): 1.226137
(94.182320,84.864601) - (95.938596,89.973684)(gt): 5.402521
(119.344688,106.302544) - (118.081944,109.368056)(gt): 3.315401
(112.849991,96.408813) - (113.155039,98.844961)(gt): 2.455172
(110.686867,112.207870) - (107.191781,110.633562)(gt): 3.833285
(106.268791,97.420868) - (107.056277,100.268398)(gt): 2.954414
(107.770409,102.193230) - (110.036913,102.892617)(gt): 2.371958
(107.421738,102.001305) - (104.120521,102.348534)(gt): 3.319427
(113.446907,115.409729) - (112.830325,117.202166)(gt): 1.895522
(111.767189,110.034637) - (106.272727,110.538961)(gt): 5.517559
(117.041290,96.137062) - (111.787140,98.403548)(gt): 5.722155
(104.960556,105.868370) - (102.147870,106.095238)(gt): 2.821821
(109.364258,108.878014) - (106.883158,102.845263)(gt): 6.523031
(116.335419,102.879608) - (116.605926,103.405926)(gt): 0.591764
(114.304451,98.925987) - (107.443243,99.183784)(gt): 6.866049
(108.710228,104.946487) - (108.008929,102.000000)(gt): 3.028797
(109.147316,102.486015) - (108.628159,101.765343)(gt): 0.888196
(109.952141,109.684128) - (108.701863,110.338509)(gt): 1.411173
(103.078148,103.390312) - (108.462783,103.498382)(gt): 5.385720
(108.606438,110.076828) - (103.164706,102.247059)(gt): 9.535079
(109.340797,98.606415) - (110.988764,100.814607)(gt): 2.755341
(118.394150,103.734406) - (117.012097,102.155242)(gt): 2.098530
(119.055794,113.327377) - (121.339286,104.048214)(gt): 9.556003
(98.055656,101.423813) - (102.694878,104.734967)(gt): 5.699659
(131.100418,105.261139) - (134.161103,99.708273)(gt): 6.340514
(104.767189,113.523384) - (102.916168,111.532934)(gt): 2.718119
(111.357430,105.488739) - (113.694779,104.742972)(gt): 2.453441
(110.856255,109.074974) - (107.085603,109.933852)(gt): 3.867232
(117.149948,119.173477) - (119.680702,110.835088)(gt): 8.713980
(107.405190,97.735222) - (107.185874,95.427509)(gt): 2.318111
(112.138466,123.800026) - (113.196891,121.243523)(gt): 2.766942
(98.408043,106.137917) - (103.472603,105.541096)(gt): 5.099604
(108.454994,90.303886) - (112.351010,89.896465)(gt): 3.917261
(80.139336,82.527351) - (76.375000,77.304054)(gt): 6.438405
(117.723312,108.428345) - (112.689349,97.112426)(gt): 12.385104
(95.232338,89.447685) - (96.862338,89.794805)(gt): 1.666551
(96.606873,104.145187) - (100.307692,109.663462)(gt): 6.644352
(102.505829,111.837509) - (105.084112,109.014019)(gt): 3.823564
(122.605225,113.316147) - (125.202429,111.595142)(gt): 3.115659
(105.939354,101.279076) - (107.670927,100.568690)(gt): 1.871628
(118.641495,112.128304) - (115.314985,107.058104)(gt): 6.064041
(92.106171,101.147697) - (95.422857,100.565714)(gt): 3.367360
(110.587746,107.438385) - (112.767956,106.850829)(gt): 2.257994
(110.412590,106.184288) - (112.320641,101.685371)(gt): 4.886810
(110.502373,109.615044) - (114.792105,105.976316)(gt): 5.625135
(109.023521,113.193481) - (106.175691,107.754696)(gt): 6.139261
(133.509003,106.737473) - (136.312112,96.562112)(gt): 10.554401
(106.238731,99.316521) - (102.202703,97.378378)(gt): 4.477267
(107.437157,102.345436) - (106.601695,97.652542)(gt): 4.766681
(106.612389,100.598892) - (105.928571,101.485714)(gt): 1.119848
(101.390503,102.012711) - (107.470588,102.211765)(gt): 6.083343
(106.227440,103.922722) - (106.821429,100.324675)(gt): 3.646747
(113.888168,100.763443) - (114.920792,95.500000)(gt): 5.363781
(108.422577,113.726395) - (113.432000,115.016000)(gt): 5.172756
(96.590263,102.298996) - (81.404598,99.140230)(gt): 15.510714
(96.382812,99.306694) - (106.671024,100.385621)(gt): 10.344630
(116.297058,101.150200) - (114.815789,99.228070)(gt): 2.426672
(87.450867,94.086525) - (102.059871,83.940129)(gt): 17.786859
(101.068398,116.680420) - (110.030534,121.534351)(gt): 10.192181
(79.155388,119.680031) - (71.130435,129.695652)(gt): 12.834038
(112.015762,123.632027) - (122.349693,121.466258)(gt): 10.558441
(109.260208,100.622925) - (113.976261,100.643917)(gt): 4.716100
(113.755966,105.057915) - (117.853526,107.258590)(gt): 4.651125
(83.391563,124.214821) - (84.121547,123.110497)(gt): 1.323785
(120.495659,115.651413) - (114.251572,103.031447)(gt): 14.080205
(109.100075,96.969460) - (106.514523,103.672199)(gt): 7.184135
(106.363373,109.503685) - (102.507640,119.899830)(gt): 11.088125
(103.759811,102.874443) - (112.951302,110.371461)(gt): 11.861230
(105.560112,99.609337) - (109.185699,105.565635)(gt): 6.972974
(122.706833,106.121895) - (114.110236,103.803150)(gt): 8.903822
(116.655327,100.268784) - (117.399160,98.970588)(gt): 1.496195
(123.623978,124.787270) - (106.376426,108.228137)(gt): 23.909892
(113.678596,109.863701) - (104.241379,110.462069)(gt): 9.456168
(123.092621,108.933525) - (111.865546,103.676471)(gt): 12.396928
(121.008469,113.830482) - (120.265152,113.939394)(gt): 0.751254
(122.502914,133.798096) - (119.036810,125.276074)(gt): 9.199932
(97.413162,93.686783) - (92.002137,105.104701)(gt): 12.635191
(93.001030,110.919823) - (83.927052,120.039514)(gt): 12.864908
(119.925270,87.860069) - (136.448366,92.339869)(gt): 17.119618
(98.378632,88.553871) - (95.004202,100.162465)(gt): 12.089095
(119.974510,116.883461) - (101.137931,98.114943)(gt): 26.590863
(117.975967,113.942581) - (105.086957,103.722826)(gt): 16.449012
(129.716431,111.258293) - (126.721698,108.476415)(gt): 4.087453
(125.367920,105.668114) - (113.634146,113.556911)(gt): 14.139114
(130.215988,112.048820) - (117.893836,109.955479)(gt): 12.498701
(110.409576,113.176559) - (96.370213,118.885106)(gt): 15.155568
(114.076439,113.596474) - (104.073620,110.717791)(gt): 10.408804
(111.186882,109.637711) - (111.754797,119.204691)(gt): 9.583822
(92.966576,92.743523) - (100.650235,101.403756)(gt): 11.577489
(102.606071,94.276115) - (104.551559,102.806954)(gt): 8.749865
(103.607101,105.772217) - (108.163531,109.085384)(gt): 5.633660
(110.620384,99.821846) - (117.465116,113.732558)(gt): 15.503492
(114.644951,107.986465) - (111.442149,111.326446)(gt): 4.627463
(107.241898,116.720055) - (98.814815,117.763889)(gt): 8.491485
(101.992622,90.491096) - (99.285714,103.200680)(gt): 12.994648
(100.498131,91.037941) - (102.051136,102.872159)(gt): 11.935684
(102.626572,96.716881) - (110.427419,101.979839)(gt): 9.410205
(104.397537,105.629143) - (138.255319,87.122340)(gt): 38.585634
(106.504417,90.088699) - (102.744035,90.401302)(gt): 3.773354
(99.856361,103.525291) - (94.543641,116.461347)(gt): 13.984510
(130.980560,113.502190) - (122.009296,114.256308)(gt): 9.002904
(92.369476,117.095650) - (92.004678,117.963743)(gt): 0.941628
(111.892532,114.947701) - (111.767742,124.064516)(gt): 9.117670
(120.800499,122.677856) - (112.315000,130.100000)(gt): 11.273505
(112.833008,99.512802) - (103.838565,101.264574)(gt): 9.163444
(119.770935,108.372261) - (109.091205,114.495114)(gt): 12.310400
(120.159752,98.615829) - (106.748447,108.760870)(gt): 16.816210
(95.137039,106.466782) - (78.333333,118.287879)(gt): 20.545142
(108.018288,96.295784) - (104.675325,97.272727)(gt): 3.482789
(111.396873,108.817978) - (108.099125,106.119534)(gt): 4.261073
(122.330086,108.201782) - (115.779817,104.006881)(gt): 7.778382
(91.224274,108.051025) - (80.674938,105.617866)(gt): 10.826299
(125.500359,86.196571) - (119.827633,88.190150)(gt): 6.012834
(112.804451,112.090660) - (108.471591,109.090909)(gt): 5.269932
(110.740471,110.441071) - (112.920354,109.438053)(gt): 2.399570
(111.281555,111.784500) - (109.100000,110.868750)(gt): 2.365963
(118.655899,114.986557) - (113.323077,115.530769)(gt): 5.360519
(117.123665,109.077553) - (107.872340,105.736170)(gt): 9.836251
(113.115707,106.045135) - (111.210280,113.067757)(gt): 7.276528
(91.547028,90.939499) - (98.471204,97.895288)(gt): 9.814643
(110.202377,102.993011) - (108.948905,100.289538)(gt): 2.979927
(98.597717,97.985893) - (96.532164,94.715400)(gt): 3.868157
(117.209312,105.905083) - (120.727124,104.555556)(gt): 3.767788
(102.442123,99.683517) - (100.225266,98.627093)(gt): 2.455705
(103.866760,108.523399) - (109.167742,101.238710)(gt): 9.009279
(107.924919,125.441170) - (107.528846,121.014423)(gt): 4.444430
(104.332100,101.507111) - (101.856618,105.419118)(gt): 4.629450
(100.009605,102.061890) - (99.530488,102.676829)(gt): 0.779554
(120.661324,101.181618) - (116.844221,100.027638)(gt): 3.987724
(104.851250,92.749817) - (104.529545,96.352273)(gt): 3.616792
(111.226608,111.004578) - (105.654762,109.162698)(gt): 5.868389
(106.675461,113.318329) - (100.354023,113.721839)(gt): 6.334303
(122.738785,99.549538) - (116.238213,101.004963)(gt): 6.661508
(105.263283,92.991470) - (98.321346,98.997680)(gt): 9.179599
(114.516106,102.642128) - (112.308426,100.562798)(gt): 3.032732
(115.696190,104.539429) - (104.056075,102.028037)(gt): 11.907954
(113.537033,114.745712) - (107.355072,107.654589)(gt): 9.407479
(109.695061,111.189651) - (108.246241,109.531955)(gt): 2.201599
(114.475983,107.390671) - (109.595782,111.446397)(gt): 6.345492
(116.748680,101.341759) - (114.296524,104.398773)(gt): 3.918981
(115.817154,109.466454) - (107.373162,99.750000)(gt): 12.872858
(90.588478,91.393517) - (92.473684,103.218045)(gt): 11.973866
(108.087830,102.122658) - (103.021448,99.554960)(gt): 5.679903
(106.713379,100.400024) - (106.552538,97.696576)(gt): 2.708229
(132.245346,107.152580) - (128.354839,108.712067)(gt): 4.191425
(98.465546,119.676277) - (102.814575,101.879352)(gt): 18.320606
(121.051567,101.267693) - (133.346561,105.611111)(gt): 13.039638
(118.298286,88.129890) - (120.732432,92.945946)(gt): 5.396245
(122.361809,97.617554) - (123.638191,100.040201)(gt): 2.738315
(117.465782,110.075745) - (115.053512,106.234114)(gt): 4.536207
(123.348755,105.856689) - (118.018692,101.611215)(gt): 6.814223
(115.210968,111.827362) - (109.877717,110.216033)(gt): 5.571350
(103.188995,97.170906) - (109.220651,100.386139)(gt): 6.835100
(107.355637,106.538750) - (107.280000,108.122222)(gt): 1.585278
(111.959900,108.960747) - (113.256809,108.727626)(gt): 1.317695
(115.824509,108.882515) - (113.320312,110.746094)(gt): 3.121526
(118.940582,106.576302) - (123.029549,110.477449)(gt): 5.651425
(118.151237,120.157532) - (116.800414,121.154085)(gt): 1.678643
(107.159157,110.831657) - (102.196850,111.976378)(gt): 5.092629
(103.420609,120.020447) - (102.422680,119.422680)(gt): 1.163265
(100.245689,93.285744) - (102.506770,99.137331)(gt): 6.273241
(116.404778,93.945969) - (112.284605,96.359638)(gt): 4.775104
(102.897301,87.638779) - (109.356322,94.045977)(gt): 9.097865
(114.952560,107.844193) - (113.557143,110.452381)(gt): 2.958012
(108.942360,99.493202) - (107.529703,103.990099)(gt): 4.713563
(108.533615,100.256409) - (114.056225,98.847390)(gt): 5.699522
(100.554276,113.397110) - (100.804688,115.023438)(gt): 1.645493
(83.249451,105.060898) - (88.260563,108.739437)(gt): 6.216341
(92.013748,85.625854) - (92.798054,81.812652)(gt): 3.893026
(117.516457,94.497177) - (109.455021,95.127615)(gt): 8.086050
(105.568779,97.076256) - (109.686275,100.692810)(gt): 5.480259
(117.430984,110.912384) - (113.023697,110.113744)(gt): 4.479064
(104.608330,90.391647) - (103.765237,88.462754)(gt): 2.105097
(100.889359,111.332153) - (101.019192,107.475758)(gt): 3.858581
(114.920616,106.225441) - (111.568306,111.207650)(gt): 6.005031
(111.284203,116.675797) - (105.763636,108.127273)(gt): 10.176144
(108.073883,112.397957) - (111.778646,116.359375)(gt): 5.423846
(113.117249,110.579514) - (115.556948,107.312073)(gt): 4.077781
(113.852821,97.574203) - (108.683544,99.234177)(gt): 5.429267
(109.417854,103.413086) - (106.180995,97.823529)(gt): 6.459133
(126.365013,99.339584) - (121.248421,99.120000)(gt): 5.121302
(91.226524,93.624420) - (86.382824,78.333333)(gt): 16.039912
(98.536591,101.188339) - (107.248447,106.018634)(gt): 9.961335
(116.378471,122.575607) - (121.671717,116.429293)(gt): 8.111451
(110.213234,106.305000) - (112.631970,102.204461)(gt): 4.760747
(105.963188,100.335922) - (103.300099,96.914116)(gt): 4.335989
(104.446854,105.669739) - (111.196429,106.446429)(gt): 6.794116
(108.381355,102.619949) - (109.801370,100.376712)(gt): 2.654911
(133.468338,101.660477) - (147.383948,102.258134)(gt): 13.928438
(122.977081,86.926781) - (125.927126,83.225371)(gt): 4.733201
(103.570251,105.385841) - (110.400000,105.507143)(gt): 6.830826
(92.244858,105.596756) - (95.372671,104.260870)(gt): 3.401148
(107.944817,95.667557) - (107.190871,102.821577)(gt): 7.193639
(108.788498,102.740913) - (110.800235,101.280846)(gt): 2.485736
(116.407700,120.845665) - (109.479042,112.305389)(gt): 10.997391
(109.507629,103.520256) - (100.789720,102.261682)(gt): 8.808289
(145.085236,116.196297) - (135.345622,124.301843)(gt): 12.671226
(98.223328,78.089531) - (105.040616,82.471989)(gt): 8.104404
(118.597923,133.822510) - (114.053435,139.259542)(gt): 7.086162
(114.358925,99.894264) - (99.725225,90.513514)(gt): 17.382280
(101.076965,94.534981) - (98.086655,104.190641)(gt): 10.108102
(113.152290,103.645576) - (113.412409,104.911192)(gt): 1.292070
(110.595482,106.796730) - (103.398810,107.488095)(gt): 7.229805
(98.349754,100.780121) - (101.058140,97.482558)(gt): 4.267232
(130.032791,105.748795) - (116.251509,113.670020)(gt): 15.895583
(106.669579,110.232155) - (115.923337,106.665163)(gt): 9.917433
(108.885925,116.570595) - (94.465608,122.915344)(gt): 15.754408
(113.144104,91.000267) - (102.449612,94.418605)(gt): 11.227519
(107.686447,108.402473) - (105.464789,108.295775)(gt): 2.224219
(117.406509,122.161949) - (115.790749,116.903084)(gt): 5.501486
(114.081955,102.678947) - (110.373333,104.066667)(gt): 3.959752
(111.132278,114.811218) - (109.255507,112.881057)(gt): 2.692173
(123.059090,103.612427) - (118.518447,105.215534)(gt): 4.815329
(95.426552,94.726318) - (94.434462,94.027982)(gt): 1.213225
(102.985382,106.621803) - (113.932773,112.915966)(gt): 12.627821
(97.070961,117.249313) - (91.414330,121.155763)(gt): 6.874433
(96.573853,98.955276) - (95.055381,98.539185)(gt): 1.574448
(105.808922,104.107101) - (97.776144,108.736928)(gt): 9.271506
(103.604439,104.043640) - (102.032895,102.802632)(gt): 2.002462
(112.284096,114.621803) - (105.822464,107.945652)(gt): 9.291054
(111.424934,103.278427) - (105.266388,99.581590)(gt): 7.182917
(127.699516,96.300270) - (123.885977,97.788952)(gt): 4.093807
(108.416008,107.819496) - (118.638191,115.457286)(gt): 12.760441
(103.407959,117.670616) - (102.669856,117.351675)(gt): 0.804064
In [24]:
#
#   Show History
#
mode = 'SHOW_HISTORY'
if mode == "SHOW_HISTORY":
    # Load the pickled training history
    print(dname_checkpoints + '/' + fname_history)
    with open(dname_checkpoints + '/' + fname_history, 'rb') as f:
        history = pickle.load(f)

    for k in history.keys():
        plt.plot(history[k])
        plt.title(k)
        plt.show()
checkpoints_pupil_net00/history.pkl
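
The loop above draws each series in its own figure. To compare training and validation error directly, both can go on one axes (a sketch assuming the dict holds the 'loss' and 'val_loss' keys that fit() with validation_data produces):

plt.plot(history['loss'], label='loss')
plt.plot(history['val_loss'], label='val_loss')
plt.yscale('log')                      # the losses span several orders of magnitude
plt.xlabel('epoch')
plt.ylabel('mean squared error')
plt.legend()
plt.show()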